import numpy as np
import tensorflow.compat.v2 as tf
tf.enable_v2_behavior()
import pandas as pd
from tensorflow import keras
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import RobustScaler
from sklearn.preprocessing import MinMaxScaler
from matplotlib import pyplot
import plotly.graph_objects as go
import math
import seaborn as sns
from sklearn.metrics import mean_squared_error
np.random.seed(1)
tf.random.set_seed(1)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, GRU, Dropout, RepeatVector, TimeDistributed
from keras import backend
# Hyper-parameters / configuration for the LSTM reconstruction model.
MODELFILENAME = 'MODELS/LSTM_1d_TFM'  # path prefix for the saved model ('.h5' appended at save time)
TIME_STEPS=144 # 1d -- 144 samples per sliding window; presumably one day at 10-minute resolution, TODO confirm
CMODEL = LSTM  # recurrent cell class used to build the network (LSTM here; GRU is also imported)
UNITS=40  # hidden units in the recurrent layer
DROPOUT=0.405  # dropout rate applied after the recurrent layer
ACTIVATION='tanh'  # activation of the per-timestep Dense(1) output head
OPTIMIZER='adam'
EPOCHS=68  # upper bound; EarlyStopping(patience=5) can halt sooner
BATCHSIZE=45
VALIDATIONSPLIT=0.1  # fraction of the training sequences held out for validation
# Code to read csv file into Colaboratory:
# from google.colab import files
# uploaded = files.upload()
# import io
# df = pd.read_csv(io.BytesIO(uploaded['SentDATA.csv']))
# Dataset is now stored in a Pandas Dataframe
# Load the sensor data and index it by timestamp.
df = pd.read_csv('../../data/dadesTFM.csv')
df.reset_index(inplace=True)  # keep the original positional index as a plain column
df['Time'] = pd.to_datetime(df['Time'])
df = df.set_index('Time')

# Short column aliases used throughout the script.
columns = ['PM1', 'PM25', 'PM10', 'PM1ATM', 'PM25ATM', 'PM10ATM']
_RENAME = {"PM 1": "PM1", "PM 2.5": "PM25", "PM 10": "PM10",
           "PM 1 ATM": "PM1ATM", "PM 2.5 ATM": "PM25ATM", "PM 10 ATM": "PM10ATM"}

# Rename once and cast the PM columns to float32 in one shot; the original
# code renamed and then re-assigned every column from `df` one by one.
df1 = df.rename(columns=_RENAME)
df1[columns] = df1[columns].astype(np.float32)

# 80/20 chronological train/test split (no shuffling: this is a time series).
df2 = df1.copy()
train_size = int(len(df2) * 0.8)
test_size = len(df2) - train_size
train, test = df2.iloc[:train_size], df2.iloc[train_size:]
# train.shape, test.shape  -> ((3117, 7), (780, 7)) in the original run
# notebook output: ((3117, 7), (780, 7))
# Standardize each training column to zero mean / unit variance.
# Work on an explicit copy and assign via .loc: `train` is a slice of `df2`,
# and writing into it directly is what raised pandas' SettingWithCopyWarning
# in the original run.
# NOTE(review): each per-column scaler is overwritten and the last one
# discarded; the test set is later standardized with its own scalers.
train = train.copy()
for col in columns:
    scaler = StandardScaler()
    train.loc[:, col] = scaler.fit_transform(train[[col]])
# notebook output: pandas SettingWithCopyWarning, emitted once per column (6x):
#   "A value is trying to be set on a copy of a slice from a DataFrame.
#    Try using .loc[row_indexer,col_indexer] = value instead" for the line
#   train[col] = scaler.fit_transform(train[[col]])
#   (see https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy)
def create_sequences(X, y, time_steps=TIME_STEPS):
    """Slice X and y into overlapping sliding windows.

    Sample i is the window ``X[i : i + time_steps]``; its target is
    ``y[i + time_steps]``, the value immediately after the window.
    Returns a pair of numpy arrays ``(Xs, ys)``.
    """
    n_windows = len(X) - time_steps
    windows = [X.iloc[i:i + time_steps].values for i in range(n_windows)]
    targets = [y.iloc[i + time_steps] for i in range(n_windows)]
    return np.array(windows), np.array(targets)
# Build the training windows from the standardized PM2.5 column only
# (columns[1] == 'PM25'): the model is trained univariate.
X_train, y_train = create_sequences(train[[columns[1]]], train[columns[1]])
#X_test, y_test = create_sequences(test[[columns[1]]], test[columns[1]])
print(f'X_train shape: {X_train.shape}')
print(f'y_train shape: {y_train.shape}')
# notebook output: X_train shape: (2973, 144, 1)  y_train shape: (2973,)
# Extra metric: root-mean-squared error (not built into Keras metrics).
def rmse(y_true, y_pred):
    """RMSE metric implemented with Keras backend ops so it runs on tensors."""
    squared_error = backend.square(y_pred - y_true)
    return backend.sqrt(backend.mean(squared_error, axis=-1))
# Network: one recurrent layer (CMODEL, configured as LSTM), dropout, then a
# per-timestep Dense(1) head so the model emits one value per input timestep.
model = Sequential([
    CMODEL(units=UNITS, return_sequences=True,
           input_shape=(X_train.shape[1], X_train.shape[2])),
    Dropout(rate=DROPOUT),
    TimeDistributed(Dense(1, kernel_initializer='normal', activation=ACTIVATION)),
])
# MAE loss; MSE and the custom RMSE are tracked as additional metrics.
model.compile(optimizer=OPTIMIZER, loss='mae', metrics=['mse', rmse])
model.summary()
# notebook output: model.summary():
#   lstm (LSTM)                    (None, 144, 40)  6720 params
#   dropout (Dropout)              (None, 144, 40)  0
#   time_distributed (Dense via TD) (None, 144, 1)  41
#   Total / trainable params: 6,761; non-trainable: 0
# Train with early stopping (patience=5 on val_loss, minimizing).
# shuffle=False keeps the sliding windows in chronological order.
history = model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCHSIZE, validation_split=VALIDATIONSPLIT,
                    callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, mode='min')], shuffle=False)
# notebook output: training log (verbatim dump condensed for readability).
# 68 epochs requested; EarlyStopping(patience=5) halted training at epoch 45.
# Training loss (MAE) decreased 0.7663 -> 0.6289; val_loss 0.4472 -> 0.2336.
# Best val_loss 0.1736 at epoch 40 (val_mse 0.0594, val_rmse 0.2076).
import matplotlib.pyplot as plt
# Learning curves for the loss and both extra metrics, train vs. validation.
plt.plot(history.history['loss'], label='MAE Training loss')
plt.plot(history.history['val_loss'], label='MAE Validation loss')
plt.plot(history.history['mse'], label='MSE Training loss')
plt.plot(history.history['val_mse'], label='MSE Validation loss')
plt.plot(history.history['rmse'], label='RMSE Training loss')
plt.plot(history.history['val_rmse'], label='RMSE Validation loss')
plt.legend();
# Reconstruction of the training windows; the distribution of the per-window
# MAE is plotted to eyeball where an anomaly threshold could sit.
X_train_pred = model.predict(X_train, verbose=0)
# Mean |error| over the time axis -> one MAE value per window.
train_mae_loss = np.mean(np.abs(X_train_pred - X_train), axis=1)
plt.hist(train_mae_loss, bins=50)
plt.xlabel('Train MAE loss')
plt.ylabel('Number of Samples');
def evaluate_prediction(predictions, actual, model_name):
    """Print and return MAE, RMSE and MSE of `predictions` against `actual`.

    Returns the tuple ``(mae, rmse, mse)``.
    """
    errors = predictions - actual
    mse = np.mean(errors ** 2)
    rmse = np.sqrt(mse)
    mae = np.mean(np.abs(errors))
    print(model_name + ':')
    print('Mean Absolute Error: {:.4f}'.format(mae))
    print('Root Mean Square Error: {:.4f}'.format(rmse))
    print('Mean Square Error: {:.4f}'.format(mse))
    print('')
    return mae, rmse, mse
# Reconstruction-error summary on the training set.
# NOTE: bind to fresh names -- the original `mae, rmse, mse = ...` rebound
# `rmse`, clobbering the metric function that is needed as a custom object
# if the saved model is ever reloaded with keras.models.load_model.
train_mae, train_rmse, train_mse = evaluate_prediction(X_train_pred, X_train, "LSTM")
# notebook output: LSTM: Mean Absolute Error: 0.2994  Root Mean Square Error: 0.5126  Mean Square Error: 0.2628
# Persist the trained model.
model.save(MODELFILENAME + '.h5')
# Threshold computation for the test set.
def calculate_threshold(X_test, X_test_pred, quantile=0.9):
    """Return an anomaly threshold from reconstruction errors.

    Computes the per-row RMSE between `X_test` and its reconstruction
    `X_test_pred`, sorts the scores, and returns the value at the
    `quantile` cut of the sorted distribution.

    Parameters
    ----------
    X_test : array of shape (n, features) -- original data.
    X_test_pred : array of the same shape -- model reconstruction.
    quantile : float, default 0.9 -- cutoff fraction. (The original
        inline note said 0.80, but the code always used 0.9; the value
        is now an explicit, overridable parameter.)

    Returns
    -------
    Scalar threshold; rows whose RMSE exceeds it are flagged anomalous.
    """
    # Per-row reconstruction distance (RMSE over the feature axis).
    distance = np.sort(np.sqrt(np.mean(np.square(X_test_pred - X_test), axis=1)))
    # Clamp so quantile=1.0 (or rounding) cannot index past the last element.
    cut_off = min(int(quantile * len(distance)), len(distance) - 1)
    return distance[cut_off]
# Per-column evaluation and anomaly detection on the test set.
for col in columns:
    print ("####################### "+col +" ###########################")
    #Standardize the test data
    # NOTE(review): a NEW scaler is fitted on the test data itself, so the
    # test scaling differs from the train-time scaling; and assigning into
    # `test` (a slice of df2) is what triggers the SettingWithCopyWarning
    # seen in the captured output.
    scaler = StandardScaler()
    test_cpy = test.copy()  # NOTE(review): this copy is never used afterwards
    test[col] = scaler.fit_transform(test[[col]])
    # Build windowed sequences (TIME_STEPS samples each) from the test data.
    X_test1, y_test1 = create_sequences(test[[col]], test[col])
    print(f'Testing shape: {X_test1.shape}')
    # Evaluate the model; returns [loss (mae), mse, rmse].
    eval = model.evaluate(X_test1, y_test1)  # NOTE(review): shadows the builtin `eval`
    print("evaluate: ",eval)
    # Reconstruct the test windows.
    X_test1_pred = model.predict(X_test1, verbose=0)
    evaluate_prediction(X_test1_pred, X_test1,"LSTM")
    # Per-window MAE and RMSE reconstruction losses.
    test1_mae_loss = np.mean(np.abs(X_test1_pred - X_test1), axis=1)
    test1_rmse_loss = np.sqrt(np.mean(np.square(X_test1_pred - X_test1),axis=1))
    # reshaping test prediction
    X_test1_predReshape = X_test1_pred.reshape((X_test1_pred.shape[0] * X_test1_pred.shape[1]), X_test1_pred.shape[2])
    # reshaping test data
    X_test1Reshape = X_test1.reshape((X_test1.shape[0] * X_test1.shape[1]), X_test1.shape[2])
    # Anomaly threshold: 0.9 quantile of the per-row reconstruction RMSE.
    threshold_test = calculate_threshold(X_test1Reshape,X_test1_predReshape)
    # Score frame aligned with the rows that have a full window behind them.
    test1_score_df = pd.DataFrame(test[TIME_STEPS:])
    test1_score_df['loss'] = test1_rmse_loss.reshape((-1))
    test1_score_df['threshold'] = threshold_test
    test1_score_df['anomaly'] = test1_score_df['loss'] > test1_score_df['threshold']
    test1_score_df[col] = test[TIME_STEPS:][col]
    # Plot the test loss against the (constant) threshold.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['loss'], name='Test loss'))
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['threshold'], name='Threshold'))
    fig.update_layout(showlegend=True, title='Test loss vs. Threshold')
    fig.show()
    # Collect the rows flagged as anomalous.
    anomalies1 = test1_score_df.loc[test1_score_df['anomaly'] == True]
    anomalies1.shape
    print('anomalies: ',anomalies1.shape); print();
    # Plot the de-standardized signal and the detected anomalies, to verify
    # that the normalization does not distort the data.
    # NOTE(review): inverse_transform is called on a 1-D Series; newer
    # scikit-learn versions require 2-D input here -- confirm sklearn version.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=scaler.inverse_transform(test1_score_df[col]), name=col))
    fig.add_trace(go.Scatter(x=anomalies1.index, y=scaler.inverse_transform(anomalies1[col]), mode='markers', name='Anomaly'))
    fig.update_layout(showlegend=True, title='Detected anomalies')
    fig.show()
    print ("######################################################")
# notebook output: per-column test loop (verbatim dump condensed).
# Testing shape was (636, 144, 1) for every column; pandas emitted the same
# SettingWithCopyWarning for `test[col] = scaler.fit_transform(test[[col]])`.
# model.evaluate [loss, mse, rmse] and anomaly counts per column:
#   PM1:     [0.7046, 1.0686, 0.7857]  anomalies: (213, 10)
#   PM25:    [0.7456, 1.1604, 0.8293]  anomalies: (100, 10)
#   PM10:    [0.7719, 1.2226, 0.8576]  anomalies: (25, 10)
#   PM1ATM:  [0.7720, 1.1343, 0.8611]  anomalies: (0, 10)
#   PM25ATM: [0.7625, 1.1129, 0.8501]  anomalies: (47, 10)
#   PM10ATM: [0.7591, 1.1155, 0.8440]  anomalies: (0, 10)